var runtime.memstats

141 uses

	runtime (current package)
		arena.go#L895: 	stats := memstats.heapStats.acquire()
		arena.go#L900: 	memstats.heapStats.release()
		arena.go#L1076: 	stats := memstats.heapStats.acquire()
		arena.go#L1083: 	memstats.heapStats.release()
		heapdump.go#L710: 		sysFree(unsafe.Pointer(&tmpbuf[0]), uintptr(len(tmpbuf)), &memstats.other_sys)
		heapdump.go#L728: 			sysFree(unsafe.Pointer(&tmpbuf[0]), uintptr(len(tmpbuf)), &memstats.other_sys)
		heapdump.go#L731: 		p := sysAlloc(n, &memstats.other_sys, "heapdump")
		iface.go#L77: 	m = (*itab)(persistentalloc(unsafe.Sizeof(itab{})+uintptr(len(inter.Methods)-1)*goarch.PtrSize, 0, &memstats.other_sys))
		malloc.go#L790: 		r = (*heapArena)(h.heapArenaAlloc.alloc(unsafe.Sizeof(*r), goarch.PtrSize, &memstats.gcMiscSys, "heap metadata"))
		malloc.go#L792: 			r = (*heapArena)(persistentalloc(unsafe.Sizeof(*r), goarch.PtrSize, &memstats.gcMiscSys))
		malloc.go#L804: 			newArray := (*notInHeap)(persistentalloc(size, goarch.PtrSize, &memstats.gcMiscSys))
		malloc.go#L1659: 		return persistentalloc(size, align, &memstats.other_sys)
		malloc.go#L1964: 		persistent.base = (*notInHeap)(sysAlloc(persistentChunkSize, &memstats.other_sys, "immortal metadata"))
		malloc.go#L1989: 	if sysStat != &memstats.other_sys {
		malloc.go#L1991: 		memstats.other_sys.add(-int64(size))
		mbitmap.go#L1494: 	x := (*[1 << 30]byte)(persistentalloc(n+1, 1, &memstats.buckhash_sys))[:n+1]
		mcache.go#L164: 		stats := memstats.heapStats.acquire()
		mcache.go#L173: 		memstats.heapStats.release()
		mcache.go#L242: 	stats := memstats.heapStats.acquire()
		mcache.go#L245: 	memstats.heapStats.release()
		mcache.go#L283: 			stats := memstats.heapStats.acquire()
		mcache.go#L285: 			memstats.heapStats.release()
		mcache.go#L310: 	stats := memstats.heapStats.acquire()
		mcache.go#L313: 	memstats.heapStats.release()
		mcheckmark.go#L48: 			bitmap = (*checkmarksMap)(persistentalloc(unsafe.Sizeof(*bitmap), 0, &memstats.gcMiscSys))
		mcleanup.go#L388: 			b = (*cleanupBlock)(persistentalloc(cleanupBlockSize, tagAlign, &memstats.gcMiscSys))
		metrics.go#L606: 	memstats.heapStats.read(&a.heapStatsDelta)
		metrics.go#L648: 	a.stacksSys = memstats.stacks_sys.load()
		metrics.go#L649: 	a.buckHashSys = memstats.buckhash_sys.load()
		metrics.go#L650: 	a.gcMiscSys = memstats.gcMiscSys.load()
		metrics.go#L651: 	a.otherSys = memstats.other_sys.load()
		metrics.go#L653: 	a.gcCyclesDone = uint64(memstats.numgc)
		metrics.go#L654: 	a.gcCyclesForced = uint64(memstats.numforcedgc)
		metrics.go#L658: 		a.mSpanSys = memstats.mspan_sys.load()
		metrics.go#L660: 		a.mCacheSys = memstats.mcache_sys.load()
		mfinal.go#L116: 			finc = (*finBlock)(persistentalloc(finBlockSize, 0, &memstats.gcMiscSys))
		mgc.go#L216: 	memstats.enablegc = true // now that runtime is initialized, GC is okay
		mgc.go#L617: 	if !memstats.enablegc || panicking.Load() != 0 || gcphase != _GCoff {
		mgc.go#L628: 		lastgc := int64(atomic.Load64(&memstats.last_gc_nanotime))
		mgc.go#L722: 			p.gcw.ptrBuf = (*[gc.PageSize / goarch.PtrSize]uintptr)(persistentalloc(gc.PageSize, goarch.PtrSize, &memstats.gcMiscSys))
		mgc.go#L1101: 	memstats.lastHeapInUse = gcController.heapInUse.load()
		mgc.go#L1113: 	atomic.Store64(&memstats.last_gc_unix, uint64(unixNow)) // must be Unix time to make sense to user
		mgc.go#L1114: 	atomic.Store64(&memstats.last_gc_nanotime, uint64(now)) // monotonic time for us
		mgc.go#L1115: 	memstats.pause_ns[memstats.numgc%uint32(len(memstats.pause_ns))] = uint64(work.pauseNS)
		mgc.go#L1116: 	memstats.pause_end[memstats.numgc%uint32(len(memstats.pause_end))] = uint64(unixNow)
		mgc.go#L1117: 	memstats.pause_total_ns += uint64(work.pauseNS)
		mgc.go#L1132: 	memstats.gc_cpu_fraction = float64(work.cpuStats.GCTotalTime-work.cpuStats.GCIdleTime) / float64(work.cpuStats.TotalTime)
		mgc.go#L1145: 		memstats.numforcedgc++
		mgc.go#L1150: 	memstats.numgc++
		mgc.go#L1225: 		clear(memstats.lastScanStats[:])
		mgc.go#L1237: 			pp.gcw.flushScanStats(&memstats.lastScanStats)
		mgc.go#L1255: 		util := int(memstats.gc_cpu_fraction * 100)
		mgc.go#L1259: 		print("gc ", memstats.numgc,
		mgclimit.go#L281: 			l.lastEnabledCycle.Store(memstats.numgc + 1)
		mgclimit.go#L333: 		l.lastEnabledCycle.Store(memstats.numgc + 1)
		mgcmark_nogreenteagc.go#L90: 	for _, stats := range memstats.lastScanStats {
		mgcmark_nogreenteagc.go#L94: 	for i, stats := range memstats.lastScanStats {
		mgcscavenge.go#L203: 	gcPercentGoal := uint64(float64(memstats.lastHeapInUse) * goalRatio)
		mgcscavenge.go#L786: 				stats := memstats.heapStats.acquire()
		mgcscavenge.go#L789: 				memstats.heapStats.release()
		mgcsweep.go#L772: 			stats := memstats.heapStats.acquire()
		mgcsweep.go#L774: 			memstats.heapStats.release()
		mgcsweep.go#L807: 			stats := memstats.heapStats.acquire()
		mgcsweep.go#L810: 			memstats.heapStats.release()
		mheap.go#L561: 		sp.array = sysAlloc(uintptr(n)*goarch.PtrSize, &memstats.other_sys, "allspans array")
		mheap.go#L573: 			sysFree(unsafe.Pointer(&oldAllspans[0]), uintptr(cap(oldAllspans))*unsafe.Sizeof(oldAllspans[0]), &memstats.other_sys)
		mheap.go#L795: 	h.spanalloc.init(unsafe.Sizeof(mspan{}), recordspan, unsafe.Pointer(h), &memstats.mspan_sys)
		mheap.go#L796: 	h.cachealloc.init(unsafe.Sizeof(mcache{}), nil, nil, &memstats.mcache_sys)
		mheap.go#L797: 	h.specialfinalizeralloc.init(unsafe.Sizeof(specialfinalizer{}), nil, nil, &memstats.other_sys)
		mheap.go#L798: 	h.specialCleanupAlloc.init(unsafe.Sizeof(specialCleanup{}), nil, nil, &memstats.other_sys)
		mheap.go#L799: 	h.specialCheckFinalizerAlloc.init(unsafe.Sizeof(specialCheckFinalizer{}), nil, nil, &memstats.other_sys)
		mheap.go#L800: 	h.specialTinyBlockAlloc.init(unsafe.Sizeof(specialTinyBlock{}), nil, nil, &memstats.other_sys)
		mheap.go#L801: 	h.specialprofilealloc.init(unsafe.Sizeof(specialprofile{}), nil, nil, &memstats.other_sys)
		mheap.go#L802: 	h.specialReachableAlloc.init(unsafe.Sizeof(specialReachable{}), nil, nil, &memstats.other_sys)
		mheap.go#L803: 	h.specialPinCounterAlloc.init(unsafe.Sizeof(specialPinCounter{}), nil, nil, &memstats.other_sys)
		mheap.go#L804: 	h.specialWeakHandleAlloc.init(unsafe.Sizeof(specialWeakHandle{}), nil, nil, &memstats.gcMiscSys)
		mheap.go#L805: 	h.specialBubbleAlloc.init(unsafe.Sizeof(specialBubble{}), nil, nil, &memstats.other_sys)
		mheap.go#L806: 	h.arenaHintAlloc.init(unsafe.Sizeof(arenaHint{}), nil, nil, &memstats.other_sys)
		mheap.go#L823: 	h.pages.init(&h.lock, &memstats.gcMiscSys, false)
		mheap.go#L1413: 	stats := memstats.heapStats.acquire()
		mheap.go#L1424: 	memstats.heapStats.release()
		mheap.go#L1587: 				stats := memstats.heapStats.acquire()
		mheap.go#L1589: 				memstats.heapStats.release()
		mheap.go#L1620: 	stats := memstats.heapStats.acquire()
		mheap.go#L1622: 	memstats.heapStats.release()
		mheap.go#L1735: 	stats := memstats.heapStats.acquire()
		mheap.go#L1744: 	memstats.heapStats.release()
		mheap.go#L2722: 				newNode = (*immortalWeakHandle)(persistentalloc(unsafe.Sizeof(immortalWeakHandle{}), goarch.PtrSize, &memstats.gcMiscSys))
		mheap.go#L3021: 		result = (*gcBitsArena)(sysAlloc(gcBitsChunkBytes, &memstats.gcMiscSys, "gc bits"))
		mprof.go#L239: 	b := (*bucket)(persistentalloc(size, 0, &memstats.buckhash_sys))
		mprof.go#L282: 			bh = (*buckhashArray)(sysAlloc(unsafe.Sizeof(buckhashArray{}), &memstats.buckhash_sys, "profiler hash buckets"))
		mspanset.go#L113: 			newSpine := persistentalloc(newCap*goarch.PtrSize, cpu.CacheLineSize, &memstats.gcMiscSys)
		mspanset.go#L325: 	return (*spanSetBlock)(persistentalloc(unsafe.Sizeof(spanSetBlock{}), max(cpu.CacheLineSize, tagAlign), &memstats.gcMiscSys))
		mstats.go#L52: var memstats mstats
		mstats.go#L338: 	if offset := unsafe.Offsetof(memstats.heapStats); offset%8 != 0 {
		mstats.go#L394: 	memstats.heapStats.unsafeRead(&consStats)
		mstats.go#L439: 		memstats.stacks_sys.load() + memstats.mspan_sys.load() + memstats.mcache_sys.load() +
		mstats.go#L440: 		memstats.buckhash_sys.load() + memstats.gcMiscSys.load() + memstats.other_sys.load() +
		mstats.go#L538: 	stats.StackSys = stackInUse + memstats.stacks_sys.load()
		mstats.go#L540: 	stats.MSpanSys = memstats.mspan_sys.load()
		mstats.go#L542: 	stats.MCacheSys = memstats.mcache_sys.load()
		mstats.go#L543: 	stats.BuckHashSys = memstats.buckhash_sys.load()
		mstats.go#L547: 	stats.GCSys = memstats.gcMiscSys.load() + gcWorkBufInUse
		mstats.go#L548: 	stats.OtherSys = memstats.other_sys.load()
		mstats.go#L550: 	stats.LastGC = memstats.last_gc_unix
		mstats.go#L551: 	stats.PauseTotalNs = memstats.pause_total_ns
		mstats.go#L552: 	stats.PauseNs = memstats.pause_ns
		mstats.go#L553: 	stats.PauseEnd = memstats.pause_end
		mstats.go#L554: 	stats.NumGC = memstats.numgc
		mstats.go#L555: 	stats.NumForcedGC = memstats.numforcedgc
		mstats.go#L556: 	stats.GCCPUFraction = memstats.gc_cpu_fraction
		mstats.go#L580: 	if cap(p) < len(memstats.pause_ns)+3 {
		mstats.go#L587: 	n := memstats.numgc
		mstats.go#L588: 	if n > uint32(len(memstats.pause_ns)) {
		mstats.go#L589: 		n = uint32(len(memstats.pause_ns))
		mstats.go#L598: 		j := (memstats.numgc - 1 - i) % uint32(len(memstats.pause_ns))
		mstats.go#L599: 		p[i] = memstats.pause_ns[j]
		mstats.go#L600: 		p[n+i] = memstats.pause_end[j]
		mstats.go#L603: 	p[n+n] = memstats.last_gc_unix
		mstats.go#L604: 	p[n+n+1] = uint64(memstats.numgc)
		mstats.go#L605: 	p[n+n+2] = memstats.pause_total_ns
		netpoll.go#L702: 		mem := persistentalloc(n*pdSize, tagAlign, &memstats.other_sys)
		os_linux.go#L210: 	stack := sysAlloc(stacksize, &memstats.stacks_sys, "OS thread stack")
		proc.go#L851: 	lockInit(&memstats.heapStats.noPLock, lockRankLeafRank)
		stack.go#L361: 		v := sysAlloc(uintptr(n), &memstats.stacks_sys, "goroutine stack (system)")
		stack.go#L481: 			sysFree(v, n, &memstats.stacks_sys)
		trace.go#L680: 				sysFree(unsafe.Pointer(buf), unsafe.Sizeof(*buf), &memstats.other_sys)
		tracebuf.go#L168: 			w.traceBuf = (*traceBuf)(sysAlloc(unsafe.Sizeof(traceBuf{}), &memstats.other_sys, "trace buffer"))
		traceregion.go#L85: 		block = (*traceRegionAllocBlock)(sysAlloc(unsafe.Sizeof(traceRegionAllocBlock{}), &memstats.other_sys, "trace arena alloc"))
		traceregion.go#L111: 		sysFree(unsafe.Pointer(block), unsafe.Sizeof(traceRegionAllocBlock{}), &memstats.other_sys)
		traceregion.go#L114: 		sysFree(current, unsafe.Sizeof(traceRegionAllocBlock{}), &memstats.other_sys)
		type.go#L135: 			p = (*byte)(persistentalloc(bytes, goarch.PtrSize, &memstats.other_sys))
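
The entries above are internal to the runtime, but the same accounting is what user code observes: the mstats.go lines populate the exported MemStats mirror, and the metrics.go lines feed the runtime/metrics package. The following is a minimal illustrative sketch (not part of the use list, and the chosen metric names are just examples of real runtime/metrics keys) of reading that state from outside the runtime.

	// Sketch: observing the memstats-backed counters from user code.
	package main

	import (
		"fmt"
		"runtime"
		"runtime/metrics"
	)

	func main() {
		// Legacy API: ReadMemStats copies the exported mirror that
		// mstats.go fills from memstats (NumGC, PauseTotalNs, OtherSys, ...).
		var ms runtime.MemStats
		runtime.ReadMemStats(&ms)
		fmt.Printf("NumGC=%d PauseTotalNs=%d OtherSys=%d\n",
			ms.NumGC, ms.PauseTotalNs, ms.OtherSys)

		// Newer API: runtime/metrics samples backed by the same accounting
		// (see the metrics.go entries above).
		samples := []metrics.Sample{
			{Name: "/gc/cycles/total:gc-cycles"},
			{Name: "/memory/classes/other:bytes"},
		}
		metrics.Read(samples)
		for _, s := range samples {
			fmt.Printf("%s = %d\n", s.Name, s.Value.Uint64())
		}
	}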